In [1]:
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt
from torchsummary import summary
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters¶

In [3]:
#check GPU availability and fix all RNG seeds for reproducibility
print (torch.cuda.is_available())
device = torch.device("cuda:0")  # all tensors and the model are moved to the first GPU
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
r = random.random  # seeded RNG callable, later passed to random.shuffle for reproducible shuffles
True
In [4]:
#1.1: settings
M = 10                       #number of time interval in a window
missing_ratio = 0.50         #encoded into the data-folder name below
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1   #loss weights: flow data, occupancy data, physics term
batch_size_vt = 16  #batch size for evaluation and test
delta_ratio = 0.1   #the ratio of delta in the standard deviation of flow

hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy, "delta_ratio": delta_ratio}

#model dimensions — presumably GNN layer widths and LSTM hidden size; confirm in NMFD_GNN
gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.757, 4.996, -2.409, 1.638, 3.569]   #initial values for the model's theta parameters
 
hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30   #early-stop patience: epochs without a validation improvement

#1.3: set paths
# NOTE(review): absolute user-specific path — consider making the data root configurable
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_10_missing_50

2: visualization¶

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot per-epoch physics/flow/occupancy training losses and save to file_name/train_loss.png."""
    plt.figure(figsize=(4,3), dpi=75)
    # rows = epochs; columns = [total, physics, flow, occupancy] average loss
    loss_arr = np.array(total_phy_flow_occ_loss)
    epochs = range(loss_arr.shape[0])
    for col, lbl in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, loss_arr[:, col], linewidth=1, label=lbl)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot validation vs. test flow MAE per epoch and save to file_name/flow_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, lbl in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=lbl)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot validation vs. test occupancy MAE per epoch and save to file_name/occ_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, lbl in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=lbl)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error¶

In [6]:
def MAELoss(yhat, y):
    """Mean absolute error between prediction `yhat` and target `y`, as a Python float.

    Fix: the original wrapped the absolute error in torch.div(..., 1), a no-op
    division by one; removed with identical results.
    """
    return float(torch.mean(torch.abs(yhat - y)))

def RMSELoss(yhat, y):
    """Root-mean-square error between prediction `yhat` and target `y`, as a Python float."""
    squared_error = (yhat - y) ** 2
    return float(torch.sqrt(torch.mean(squared_error)))

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):
    """Evaluate the model over a validation/test split in mini-batches of size b_s_vt.

    f/o are targets, f_mask/o_mask the masked inputs fed to model.run.
    Returns de-normalized (f_mae, f_rmse, o_mae, o_rmse), each aggregated across
    batches weighted by batch size (RMSE via the mean of squared errors).
    """
    flow_std = f_o_mean_std[1]
    occ_std = f_o_mean_std[3]
    n = len(f)
    f_mae_list = []
    f_rmse_list = []
    o_mae_list = []
    o_rmse_list = []
    num_list = []
    for start in range(0, n, b_s_vt):
        end = np.min([start + b_s_vt, n])
        num_list.append(end - start)
        bf = f[start:end]
        bo = o[start:end]
        bf_mask = f_mask[start:end]
        bo_mask = o_mask[start:end]
        bf_hat, bo_hat, bq_hat, bq_theta = model.run(bf_mask, bo_mask)
        bf_hat = bf_hat.cpu()
        bo_hat = bo_hat.cpu()
        # de-normalize errors by multiplying with the respective standard deviations
        f_mae_list.append(MAELoss(bf_hat, bf) * flow_std)
        f_rmse_list.append(RMSELoss(bf_hat, bf) * flow_std)
        o_mae_list.append(MAELoss(bo_hat, bo) * occ_std)
        o_rmse_list.append(RMSELoss(bo_hat, bo) * occ_std)
    f_mae = np.dot(f_mae_list, num_list) / n
    o_mae = np.dot(o_mae_list, num_list) / n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list) / n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list) / n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt):
    """Thin wrapper around vali_test; vt: validation/test split.

    Note the argument reordering: vali_test expects (flow, flow_mask, occ, occ_mask).
    """
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)

4: train¶

In [7]:
import torch
In [8]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ, delta): 
    #f: flow; o: occupancy
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))   
    
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]   
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad() 
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        
        #p_loss = criterion(q_hat, q_theta).cpu()                #physical loss 
        #p_loss = p_loss/flow_std_squ
        
        #hinge loss
        q_gap = q_hat - q_theta       
        delta_gap = torch.ones(q_gap.shape, device=device)*delta
        zero_gap = torch.zeros(q_gap.shape, device=device)            #(n, m)
        hl_loss = torch.max(q_gap-delta_gap, zero_gap) + torch.max(-delta_gap-q_gap, zero_gap) 
        hl_loss = hl_loss/flow_std_squ
        p_loss = criterion(hl_loss, zero_gap).cpu()            #(n, m)
        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy
        
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        
        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")
        n_loss = float(len(losses)+0.000001)
        aver_loss = sum(losses)/n_loss
        aver_p_loss = sum(p_losses)/n_loss
        aver_f_loss = sum(f_losses)/n_loss
        aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    total_phy_flow_occ_loss = list()
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    f_std = f_o_mean_std[1]
    
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_std, 2)
    
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        delta = hyper["delta_ratio"] * f_std
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ, delta)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors¶

In [9]:
def tensorize(train_vali_test):
    """Convert the four list-valued fields of a data split into torch tensors."""
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    """Z-score normalize flow and occupancy (and their masked variants) in a split dict.

    f_o_mean_std = [flow_mean, flow_std, occ_mean, occ_std]. Fields are replaced
    with normalized nested lists; the (mutated) dict is returned.
    """
    f_mean, f_std, o_mean, o_std = f_o_mean_std
    normalization_plan = (("flow", f_mean, f_std),
                          ("flow_mask", f_mean, f_std),
                          ("occupancy", o_mean, o_std),
                          ("occupancy_mask", o_mean, o_std))
    for key, mean, std in normalization_plan:
        tvt[key] = ((np.array(tvt[key]) - mean) / std).tolist()
    return tvt

def transform_distance(d_matrix):
    """Map a distance matrix to Gaussian-kernel weights exp(-1e4 * d^2 / sigma^2).

    sigma is the standard deviation over all entries of d_matrix. Returns a new
    nested list of the same shape; entries lie in (0, 1] with 1.0 at distance 0.
    Fix: the original iterated element-by-element in a Python double loop and
    mutated its input in place; this version is a single vectorized numpy
    expression and leaves the input untouched (callers only use the return value).
    NOTE(review): a constant matrix gives sigma == 0 and divides by zero,
    matching the original behavior.
    """
    d = np.asarray(d_matrix, dtype=float)
    sigma_square = np.std(d) ** 2
    return np.exp(-10000.0 * d * d / sigma_square).tolist()

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load the three data splits, normalization stats, adjacency, and sensor lengths.

    Returns (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length) where
    the splits are dicts of normalized torch tensors, adj is a float tensor on
    `device` after the Gaussian distance transform, and sensor_length[i] holds
    the length value for the sensor whose ordered index is i.
    """
    mean_std = json.load(open(mean_std_path))
    f_o_mean_std = [mean_std["f_mean"], mean_std["f_std"],
                    mean_std["o_mean"], mean_std["o_std"]]

    train = json.load(open(train_path))
    vali = json.load(open(vali_path))
    test = json.load(open(test_path))
    adj = json.load(open(sensor_adj_path))["adj"]
    n_sensor = len(train["flow"][0])

    # normalize every split with the same precomputed statistics, then tensorize
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    adj = torch.tensor(transform_distance(adj), device=device).float()

    # df_sensor_id: sensor id -> list where [0] is the ordered index and [3] the length
    df_sensor_id = json.load(open(sensor_id_path))
    sensor_length = [0.0] * n_sensor
    for sensor in df_sensor_id:
        idx = df_sensor_id[sensor][0]
        sensor_length[idx] = df_sensor_id[sensor][3]

    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main¶

In [10]:
#6.1 load the data
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)  # wall-clock data-loading time in seconds
11.363991022109985
In [11]:
#sanity check: split sizes and the statistics [f_mean, f_std, o_mean, o_std]
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
2007
663
663
[240.33475289317576, 220.77174415099844, 0.13747677267259475, 0.19174061724441546]
In [12]:
#build the model on the GPU; MSE is the criterion used for all three loss terms
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [13]:
#6.2: train the model
#returns per-epoch losses and the trained model; figures and performance.json are written under file_name/
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  125
i_batch:  0.0
the loss for this batch:  1.8414308
flow loss 0.92753893
occ loss 0.9138899
time for this batch 0.8563082218170166
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45965022
flow loss 0.16798665
occ loss 0.29166114
time for this batch 0.21345853805541992
----------------------------------
train loss for this epoch:  0.587177
time for this epoch 38.55298638343811
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  125
i_batch:  0.0
the loss for this batch:  0.35231787
flow loss 0.13469231
occ loss 0.21762325
time for this batch 0.21337890625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.36744604
flow loss 0.13052775
occ loss 0.23691517
time for this batch 0.26166820526123047
----------------------------------
train loss for this epoch:  0.392313
time for this epoch 38.482863664627075
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  125
i_batch:  0.0
the loss for this batch:  0.33114082
flow loss 0.12518759
occ loss 0.20595041
time for this batch 0.20287013053894043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35361248
flow loss 0.11542369
occ loss 0.2381856
time for this batch 0.25232625007629395
----------------------------------
train loss for this epoch:  0.355434
time for this epoch 36.6760196685791
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  125
i_batch:  0.0
the loss for this batch:  0.32903153
flow loss 0.10883176
occ loss 0.22019683
time for this batch 0.16118454933166504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4018452
flow loss 0.12559749
occ loss 0.27624372
time for this batch 0.25580763816833496
----------------------------------
train loss for this epoch:  0.335364
time for this epoch 40.8969669342041
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  4
# batch:  125
i_batch:  0.0
the loss for this batch:  0.34775785
flow loss 0.113414854
occ loss 0.23433991
time for this batch 0.19744467735290527
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30532902
flow loss 0.09690206
occ loss 0.2084244
time for this batch 0.19155573844909668
----------------------------------
train loss for this epoch:  0.323942
time for this epoch 36.409560680389404
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  125
i_batch:  0.0
the loss for this batch:  0.28858554
flow loss 0.09510437
occ loss 0.19347851
time for this batch 0.20176434516906738
----------------------------------
i_batch:  64.0
the loss for this batch:  0.44640762
flow loss 0.12631658
occ loss 0.3200865
time for this batch 0.26904845237731934
----------------------------------
train loss for this epoch:  0.315001
time for this epoch 40.9627161026001
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  6
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2293528
flow loss 0.07938108
occ loss 0.14996979
time for this batch 0.23567724227905273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.31748736
flow loss 0.099219225
occ loss 0.21826546
time for this batch 0.27227163314819336
----------------------------------
train loss for this epoch:  0.308621
time for this epoch 42.028258085250854
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  125
i_batch:  0.0
the loss for this batch:  0.359917
flow loss 0.10279307
occ loss 0.25712052
time for this batch 0.23253202438354492
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23682478
flow loss 0.080204874
occ loss 0.15661769
time for this batch 0.2864696979522705
----------------------------------
train loss for this epoch:  0.30278
time for this epoch 40.45991134643555
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  8
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26747543
flow loss 0.0869026
occ loss 0.18057041
time for this batch 0.21356415748596191
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19863705
flow loss 0.06230309
occ loss 0.13633226
time for this batch 0.25872015953063965
----------------------------------
train loss for this epoch:  0.299851
time for this epoch 40.90656137466431
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  9
# batch:  125
i_batch:  0.0
the loss for this batch:  0.38338393
flow loss 0.10330619
occ loss 0.28007442
time for this batch 0.24542784690856934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.31288683
flow loss 0.08749831
occ loss 0.22538568
time for this batch 0.28807854652404785
----------------------------------
train loss for this epoch:  0.295074
time for this epoch 42.01219916343689
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  125
i_batch:  0.0
the loss for this batch:  0.30172086
flow loss 0.08286461
occ loss 0.21885397
time for this batch 0.17701101303100586
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1774476
flow loss 0.05980489
occ loss 0.117640994
time for this batch 0.2378087043762207
----------------------------------
train loss for this epoch:  0.290899
time for this epoch 41.144152879714966
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21729904
flow loss 0.072451875
occ loss 0.14484504
time for this batch 0.19814038276672363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3110581
flow loss 0.084993854
occ loss 0.22606127
time for this batch 0.2786521911621094
----------------------------------
train loss for this epoch:  0.288089
time for this epoch 41.984994888305664
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  12
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23095897
flow loss 0.07241725
occ loss 0.15853967
time for this batch 0.22017216682434082
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29783103
flow loss 0.08573996
occ loss 0.21208805
time for this batch 0.2705237865447998
----------------------------------
train loss for this epoch:  0.285921
time for this epoch 41.82296586036682
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  13
# batch:  125
i_batch:  0.0
the loss for this batch:  0.39048958
flow loss 0.10101222
occ loss 0.28947335
time for this batch 0.20934724807739258
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2531637
flow loss 0.07855044
occ loss 0.17461075
time for this batch 0.25981950759887695
----------------------------------
train loss for this epoch:  0.281732
time for this epoch 41.781925678253174
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  14
# batch:  125
i_batch:  0.0
the loss for this batch:  0.28371674
flow loss 0.0718159
occ loss 0.21189791
time for this batch 0.2093338966369629
----------------------------------
i_batch:  64.0
the loss for this batch:  0.33691236
flow loss 0.090220265
occ loss 0.24668878
time for this batch 0.27156543731689453
----------------------------------
train loss for this epoch:  0.279631
time for this epoch 41.96280789375305
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  15
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26893407
flow loss 0.08602584
occ loss 0.18290508
time for this batch 0.22816872596740723
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32263392
flow loss 0.08842536
occ loss 0.234205
time for this batch 0.25699853897094727
----------------------------------
train loss for this epoch:  0.27791
time for this epoch 42.53934407234192
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  16
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2622934
flow loss 0.0724553
occ loss 0.18983549
time for this batch 0.19074463844299316
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2571864
flow loss 0.07567566
occ loss 0.18150793
time for this batch 0.26926493644714355
----------------------------------
train loss for this epoch:  0.273905
time for this epoch 41.66872477531433
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  17
# batch:  125
i_batch:  0.0
the loss for this batch:  0.31030566
flow loss 0.08076872
occ loss 0.2295345
time for this batch 0.20749306678771973
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22077945
flow loss 0.07936062
occ loss 0.14141622
time for this batch 0.2544131278991699
----------------------------------
train loss for this epoch:  0.27221
time for this epoch 40.98801779747009
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  18
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23837574
flow loss 0.067498796
occ loss 0.17087454
time for this batch 0.2258148193359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21676534
flow loss 0.06688463
occ loss 0.1498782
time for this batch 0.26673054695129395
----------------------------------
train loss for this epoch:  0.271173
time for this epoch 40.42730116844177
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  19
# batch:  125
i_batch:  0.0
the loss for this batch:  0.31564602
flow loss 0.0915236
occ loss 0.22411901
time for this batch 0.23389101028442383
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19984283
flow loss 0.059440665
occ loss 0.14040017
time for this batch 0.26729345321655273
----------------------------------
train loss for this epoch:  0.270346
time for this epoch 41.8628625869751
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  20
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19777612
flow loss 0.059666242
occ loss 0.13810794
time for this batch 0.2031114101409912
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19217199
flow loss 0.05998134
occ loss 0.13218854
time for this batch 0.2673203945159912
----------------------------------
train loss for this epoch:  0.266878
time for this epoch 40.95800161361694
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  21
# batch:  125
i_batch:  0.0
the loss for this batch:  0.3207984
flow loss 0.09282313
occ loss 0.22797197
time for this batch 0.23520421981811523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25230244
flow loss 0.073977344
occ loss 0.17832212
time for this batch 0.28711915016174316
----------------------------------
train loss for this epoch:  0.264954
time for this epoch 41.68501543998718
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  22
# batch:  125
i_batch:  0.0
the loss for this batch:  0.3002748
flow loss 0.078140445
occ loss 0.22213136
time for this batch 0.14823031425476074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21743105
flow loss 0.06585655
occ loss 0.15157181
time for this batch 0.26244163513183594
----------------------------------
train loss for this epoch:  0.264003
time for this epoch 41.020365715026855
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  23
# batch:  125
i_batch:  0.0
the loss for this batch:  0.27240628
flow loss 0.07974536
occ loss 0.19265807
time for this batch 0.20927691459655762
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20353127
flow loss 0.06433734
occ loss 0.13919166
time for this batch 0.2827889919281006
----------------------------------
train loss for this epoch:  0.262331
time for this epoch 42.18397283554077
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  24
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23034048
flow loss 0.0649897
occ loss 0.16534798
time for this batch 0.23336386680603027
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25201347
flow loss 0.07650952
occ loss 0.17550077
time for this batch 0.2786085605621338
----------------------------------
train loss for this epoch:  0.259506
time for this epoch 43.78471255302429
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  25
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2939579
flow loss 0.076458715
occ loss 0.21749598
time for this batch 0.2827460765838623
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27025634
flow loss 0.07978802
occ loss 0.19046497
time for this batch 0.27390336990356445
----------------------------------
train loss for this epoch:  0.258613
time for this epoch 43.781771659851074
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  26
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26372555
flow loss 0.075827956
occ loss 0.18789448
time for this batch 0.21874785423278809
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19777387
flow loss 0.061935812
occ loss 0.13583581
time for this batch 0.27890777587890625
----------------------------------
train loss for this epoch:  0.256407
time for this epoch 41.92899250984192
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  27
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19545592
flow loss 0.05884064
occ loss 0.13661303
time for this batch 0.22065210342407227
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21640669
flow loss 0.069536775
occ loss 0.14686774
time for this batch 0.24285030364990234
----------------------------------
train loss for this epoch:  0.255269
time for this epoch 41.37053155899048
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  28
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23868074
flow loss 0.072132014
occ loss 0.166546
time for this batch 0.23404264450073242
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29006562
flow loss 0.07844192
occ loss 0.21162091
time for this batch 0.27571606636047363
----------------------------------
train loss for this epoch:  0.253693
time for this epoch 42.034462451934814
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  29
# batch:  125
i_batch:  0.0
the loss for this batch:  0.30590454
flow loss 0.0776983
occ loss 0.22820322
time for this batch 0.2087714672088623
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28522795
flow loss 0.079841286
occ loss 0.2053834
time for this batch 0.2841835021972656
----------------------------------
train loss for this epoch:  0.253401
time for this epoch 41.53965520858765
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  30
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17239751
flow loss 0.050254025
occ loss 0.12214154
time for this batch 0.2331235408782959
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27989775
flow loss 0.07544826
occ loss 0.20444661
time for this batch 0.2568094730377197
----------------------------------
train loss for this epoch:  0.251431
time for this epoch 41.88278651237488
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  31
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1845639
flow loss 0.058050126
occ loss 0.12651177
time for this batch 0.2239704132080078
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2504021
flow loss 0.07284675
occ loss 0.17755237
time for this batch 0.2834770679473877
----------------------------------
train loss for this epoch:  0.250066
time for this epoch 41.758960485458374
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  32
# batch:  125
i_batch:  0.0
the loss for this batch:  0.14841044
flow loss 0.05169462
occ loss 0.09671416
time for this batch 0.2072460651397705
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25223988
flow loss 0.06670359
occ loss 0.18553351
time for this batch 0.24680519104003906
----------------------------------
train loss for this epoch:  0.251804
time for this epoch 41.487696409225464
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  33
# batch:  125
i_batch:  0.0
the loss for this batch:  0.15456451
flow loss 0.04820431
occ loss 0.10635851
time for this batch 0.22594022750854492
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28128454
flow loss 0.08163659
occ loss 0.19964458
time for this batch 0.21125078201293945
----------------------------------
train loss for this epoch:  0.24891
time for this epoch 40.78103303909302
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  34
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22518888
flow loss 0.0696722
occ loss 0.15551397
time for this batch 0.24277210235595703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18728565
flow loss 0.05942203
occ loss 0.12786134
time for this batch 0.28765082359313965
----------------------------------
train loss for this epoch:  0.24662
time for this epoch 42.79044532775879
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  35
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2803213
flow loss 0.078496926
occ loss 0.20182058
time for this batch 0.23141789436340332
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1861556
flow loss 0.057315033
occ loss 0.12883846
time for this batch 0.2705566883087158
----------------------------------
train loss for this epoch:  0.246866
time for this epoch 40.86542367935181
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  36
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26851544
flow loss 0.074023634
occ loss 0.19448878
time for this batch 0.21842455863952637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28146464
flow loss 0.082633354
occ loss 0.19882771
time for this batch 0.2658841609954834
----------------------------------
train loss for this epoch:  0.245788
time for this epoch 41.10043382644653
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  37
# batch:  125
i_batch:  0.0
the loss for this batch:  0.3166088
flow loss 0.08244415
occ loss 0.23416162
time for this batch 0.17280006408691406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25992987
flow loss 0.07314552
occ loss 0.18678135
time for this batch 0.2594292163848877
----------------------------------
train loss for this epoch:  0.245806
time for this epoch 41.91363716125488
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  38
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21781844
flow loss 0.0696523
occ loss 0.14816374
time for this batch 0.2400829792022705
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27088434
flow loss 0.073750064
occ loss 0.19713148
time for this batch 0.28087902069091797
----------------------------------
train loss for this epoch:  0.243536
time for this epoch 41.94390082359314
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  39
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26977247
flow loss 0.06776723
occ loss 0.20200236
time for this batch 0.2314596176147461
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25797474
flow loss 0.07904191
occ loss 0.1789293
time for this batch 0.268707275390625
----------------------------------
train loss for this epoch:  0.243398
time for this epoch 41.088252544403076
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  40
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23710899
flow loss 0.07066212
occ loss 0.16644365
time for this batch 0.2319347858428955
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25292194
flow loss 0.06724407
occ loss 0.18567508
time for this batch 0.26622867584228516
----------------------------------
train loss for this epoch:  0.242674
time for this epoch 41.14931130409241
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  41
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21543492
flow loss 0.06684609
occ loss 0.14858593
time for this batch 0.22763657569885254
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22089633
flow loss 0.06647388
occ loss 0.15441999
time for this batch 0.28270387649536133
----------------------------------
train loss for this epoch:  0.242132
time for this epoch 41.96283316612244
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  42
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22940822
flow loss 0.06785363
occ loss 0.16155206
time for this batch 0.24377155303955078
----------------------------------
i_batch:  64.0
the loss for this batch:  0.267352
flow loss 0.0801972
occ loss 0.18715137
time for this batch 0.2729218006134033
----------------------------------
train loss for this epoch:  0.241839
time for this epoch 41.81826853752136
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  43
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24201745
flow loss 0.06653492
occ loss 0.17547947
time for this batch 0.26564455032348633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15275988
flow loss 0.049519192
occ loss 0.10323894
time for this batch 0.267697811126709
----------------------------------
train loss for this epoch:  0.240476
time for this epoch 41.50154495239258
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  44
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2307256
flow loss 0.06269788
occ loss 0.16802499
time for this batch 0.20597267150878906
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22507973
flow loss 0.061811756
occ loss 0.16326539
time for this batch 0.2798471450805664
----------------------------------
train loss for this epoch:  0.240497
time for this epoch 40.25916075706482
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  45
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21366076
flow loss 0.06418137
occ loss 0.14947638
time for this batch 0.23711609840393066
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2751279
flow loss 0.073575236
occ loss 0.20154937
time for this batch 0.275968074798584
----------------------------------
train loss for this epoch:  0.240259
time for this epoch 41.044926166534424
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  46
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19972643
flow loss 0.06434933
occ loss 0.13537459
time for this batch 0.24289393424987793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28022254
flow loss 0.074966766
occ loss 0.20525222
time for this batch 0.27210521697998047
----------------------------------
train loss for this epoch:  0.238622
time for this epoch 41.13613772392273
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  47
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26079506
flow loss 0.08136107
occ loss 0.17943123
time for this batch 0.21535491943359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23882875
flow loss 0.06660217
occ loss 0.17222361
time for this batch 0.2757711410522461
----------------------------------
train loss for this epoch:  0.24061
time for this epoch 41.32854652404785
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  48
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2708486
flow loss 0.07165233
occ loss 0.19919315
time for this batch 0.22971105575561523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22470511
flow loss 0.06773043
occ loss 0.15697223
time for this batch 0.2869706153869629
----------------------------------
train loss for this epoch:  0.238984
time for this epoch 42.12303566932678
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  49
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26478952
flow loss 0.06791037
occ loss 0.19687603
time for this batch 0.23865866661071777
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30252996
flow loss 0.0723655
occ loss 0.23016159
time for this batch 0.25132250785827637
----------------------------------
train loss for this epoch:  0.236611
time for this epoch 41.77879548072815
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  50
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22394107
flow loss 0.07003973
occ loss 0.15389822
time for this batch 0.19198870658874512
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16482924
flow loss 0.04989575
occ loss 0.11493146
time for this batch 0.27187204360961914
----------------------------------
train loss for this epoch:  0.236511
time for this epoch 42.3287250995636
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  51
# batch:  125
i_batch:  0.0
the loss for this batch:  0.27259412
flow loss 0.072005644
occ loss 0.20058496
time for this batch 0.2225797176361084
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17057993
flow loss 0.049272183
occ loss 0.1213058
time for this batch 0.265413761138916
----------------------------------
train loss for this epoch:  0.237859
time for this epoch 42.6271436214447
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  52
# batch:  125
i_batch:  0.0
the loss for this batch:  0.13954811
flow loss 0.04150943
occ loss 0.09803713
time for this batch 0.23494291305541992
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29853383
flow loss 0.077072755
occ loss 0.22145785
time for this batch 0.27828073501586914
----------------------------------
train loss for this epoch:  0.236755
time for this epoch 41.6546585559845
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  53
# batch:  125
i_batch:  0.0
the loss for this batch:  0.35413265
flow loss 0.08601096
occ loss 0.26811767
time for this batch 0.24108290672302246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27366218
flow loss 0.0708987
occ loss 0.20276068
time for this batch 0.2732582092285156
----------------------------------
train loss for this epoch:  0.235839
time for this epoch 41.75529193878174
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  54
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25286707
flow loss 0.067940444
occ loss 0.18492398
time for this batch 0.21805167198181152
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29418078
flow loss 0.07862101
occ loss 0.21555601
time for this batch 0.27674198150634766
----------------------------------
train loss for this epoch:  0.235396
time for this epoch 41.89839220046997
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  55
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24594778
flow loss 0.07038941
occ loss 0.17555544
time for this batch 0.2189779281616211
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2277434
flow loss 0.06383535
occ loss 0.16390567
time for this batch 0.27669787406921387
----------------------------------
train loss for this epoch:  0.235576
time for this epoch 41.25068259239197
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  56
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21428227
flow loss 0.05929067
occ loss 0.15498929
time for this batch 0.21587085723876953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22546466
flow loss 0.065730244
occ loss 0.15973218
time for this batch 0.26003551483154297
----------------------------------
train loss for this epoch:  0.23479
time for this epoch 42.20950508117676
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  57
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23544924
flow loss 0.06676033
occ loss 0.16868562
time for this batch 0.2706787586212158
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26058158
flow loss 0.070931725
occ loss 0.18964678
time for this batch 0.2614607810974121
----------------------------------
train loss for this epoch:  0.234689
time for this epoch 41.941227436065674
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  58
# batch:  125
i_batch:  0.0
the loss for this batch:  0.29303578
flow loss 0.07839674
occ loss 0.21463518
time for this batch 0.2645082473754883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21292523
flow loss 0.058307108
occ loss 0.15461567
time for this batch 0.2754666805267334
----------------------------------
train loss for this epoch:  0.234621
time for this epoch 43.64133954048157
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  59
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1903666
flow loss 0.05954063
occ loss 0.13082351
time for this batch 0.24034762382507324
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22620767
flow loss 0.06204022
occ loss 0.16416427
time for this batch 0.28040218353271484
----------------------------------
train loss for this epoch:  0.233526
time for this epoch 41.244598388671875
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  60
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21300173
flow loss 0.06433138
occ loss 0.14866771
time for this batch 0.22133302688598633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23951547
flow loss 0.06289869
occ loss 0.17661375
time for this batch 0.27775096893310547
----------------------------------
train loss for this epoch:  0.234641
time for this epoch 44.08344483375549
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  61
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24006954
flow loss 0.06628541
occ loss 0.17378137
time for this batch 0.2300126552581787
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18168435
flow loss 0.05128265
occ loss 0.13039954
time for this batch 0.26367616653442383
----------------------------------
train loss for this epoch:  0.233262
time for this epoch 42.84070420265198
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  62
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19604118
flow loss 0.05711527
occ loss 0.13892348
time for this batch 0.2388167381286621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2803127
flow loss 0.07747343
occ loss 0.20283599
time for this batch 0.27002859115600586
----------------------------------
train loss for this epoch:  0.232411
time for this epoch 41.81299138069153
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  63
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22629365
flow loss 0.059519976
occ loss 0.1667716
time for this batch 0.23772883415222168
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23676449
flow loss 0.069682054
occ loss 0.16707934
time for this batch 0.2672078609466553
----------------------------------
train loss for this epoch:  0.232456
time for this epoch 42.337172508239746
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  64
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25797018
flow loss 0.067976326
occ loss 0.1899906
time for this batch 0.2072889804840088
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3022592
flow loss 0.07300302
occ loss 0.22925326
time for this batch 0.28032994270324707
----------------------------------
train loss for this epoch:  0.232081
time for this epoch 41.24631381034851
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  65
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23990403
flow loss 0.066396624
occ loss 0.17350468
time for this batch 0.21597576141357422
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2985823
flow loss 0.076931745
occ loss 0.22164688
time for this batch 0.2557492256164551
----------------------------------
train loss for this epoch:  0.231326
time for this epoch 41.13559031486511
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  66
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17269409
flow loss 0.048701037
occ loss 0.12399069
time for this batch 0.23293089866638184
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22518955
flow loss 0.072024345
occ loss 0.15316248
time for this batch 0.269197940826416
----------------------------------
train loss for this epoch:  0.230385
time for this epoch 41.70528483390808
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  67
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26929533
flow loss 0.07363257
occ loss 0.1956591
time for this batch 0.2330162525177002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21482153
flow loss 0.0606987
occ loss 0.15411977
time for this batch 0.2798597812652588
----------------------------------
train loss for this epoch:  0.231396
time for this epoch 41.43988490104675
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  68
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1823574
flow loss 0.053852692
occ loss 0.12850223
time for this batch 0.24834775924682617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2554196
flow loss 0.06715931
occ loss 0.18825717
time for this batch 0.24804353713989258
----------------------------------
train loss for this epoch:  0.23111
time for this epoch 41.638182163238525
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  69
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18190786
flow loss 0.05957433
occ loss 0.122331485
time for this batch 0.20174479484558105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16822173
flow loss 0.04620324
occ loss 0.122016504
time for this batch 0.24509310722351074
----------------------------------
train loss for this epoch:  0.230039
time for this epoch 41.17905640602112
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  70
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25584415
flow loss 0.07373919
occ loss 0.182102
time for this batch 0.21828007698059082
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22296053
flow loss 0.063603505
occ loss 0.15935414
time for this batch 0.2734956741333008
----------------------------------
train loss for this epoch:  0.229927
time for this epoch 42.03036093711853
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  71
# batch:  125
i_batch:  0.0
the loss for this batch:  0.28136772
flow loss 0.07189265
occ loss 0.20947167
time for this batch 0.2047586441040039
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2532812
flow loss 0.069291525
occ loss 0.18398707
time for this batch 0.24064397811889648
----------------------------------
train loss for this epoch:  0.230344
time for this epoch 42.032185554504395
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  72
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2469576
flow loss 0.066514015
occ loss 0.18044071
time for this batch 0.2347571849822998
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23504862
flow loss 0.06359307
occ loss 0.17145264
time for this batch 0.2872464656829834
----------------------------------
train loss for this epoch:  0.230055
time for this epoch 41.73973894119263
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  73
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19880849
flow loss 0.058283947
occ loss 0.14052165
time for this batch 0.2162022590637207
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25293875
flow loss 0.07175293
occ loss 0.18118227
time for this batch 0.2678062915802002
----------------------------------
train loss for this epoch:  0.228217
time for this epoch 41.64320683479309
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  74
# batch:  125
i_batch:  0.0
the loss for this batch:  0.32239988
flow loss 0.080274716
occ loss 0.24212143
time for this batch 0.21129226684570312
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22052734
flow loss 0.06282087
occ loss 0.15770333
time for this batch 0.279437780380249
----------------------------------
train loss for this epoch:  0.229702
time for this epoch 41.94496417045593
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  75
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19973174
flow loss 0.056763556
occ loss 0.14296544
time for this batch 0.20404505729675293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26889843
flow loss 0.07246931
occ loss 0.19642626
time for this batch 0.2765214443206787
----------------------------------
train loss for this epoch:  0.228939
time for this epoch 41.78610634803772
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  76
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23010777
flow loss 0.057520706
occ loss 0.1725845
time for this batch 0.23522090911865234
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16722693
flow loss 0.04741858
occ loss 0.119806185
time for this batch 0.23984527587890625
----------------------------------
train loss for this epoch:  0.228746
time for this epoch 42.28582262992859
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  77
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21463917
flow loss 0.060492564
occ loss 0.15414426
time for this batch 0.23732829093933105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21952601
flow loss 0.061162215
occ loss 0.15836093
time for this batch 0.2623271942138672
----------------------------------
train loss for this epoch:  0.228937
time for this epoch 41.65245056152344
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  78
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2735226
flow loss 0.07392144
occ loss 0.1995973
time for this batch 0.21045351028442383
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20558394
flow loss 0.06041336
occ loss 0.1451681
time for this batch 0.24865102767944336
----------------------------------
train loss for this epoch:  0.226723
time for this epoch 41.4185836315155
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  79
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2480248
flow loss 0.06411001
occ loss 0.18391171
time for this batch 0.22251486778259277
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2630189
flow loss 0.07221432
occ loss 0.19080117
time for this batch 0.2653355598449707
----------------------------------
train loss for this epoch:  0.227737
time for this epoch 41.366921186447144
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  80
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25266457
flow loss 0.0667992
occ loss 0.18586211
time for this batch 0.20131540298461914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19769885
flow loss 0.054230843
occ loss 0.14346567
time for this batch 0.24327635765075684
----------------------------------
train loss for this epoch:  0.227425
time for this epoch 41.3043327331543
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  81
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22589727
flow loss 0.061502825
occ loss 0.16439104
time for this batch 0.2111802101135254
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23598254
flow loss 0.06405857
occ loss 0.1719209
time for this batch 0.277996301651001
----------------------------------
train loss for this epoch:  0.227004
time for this epoch 41.578349113464355
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  82
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23954463
flow loss 0.07040868
occ loss 0.16913278
time for this batch 0.23908448219299316
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19371219
flow loss 0.054814488
occ loss 0.1388952
time for this batch 0.28201961517333984
----------------------------------
train loss for this epoch:  0.226786
time for this epoch 41.85551929473877
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  83
# batch:  125
i_batch:  0.0
the loss for this batch:  0.28767535
flow loss 0.07675408
occ loss 0.21091792
time for this batch 0.24402308464050293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27976662
flow loss 0.073091716
occ loss 0.20667177
time for this batch 0.22588157653808594
----------------------------------
train loss for this epoch:  0.227563
time for this epoch 41.725703954696655
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  84
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21414179
flow loss 0.056883052
occ loss 0.15725629
time for this batch 0.18756675720214844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24165039
flow loss 0.067020625
occ loss 0.17462677
time for this batch 0.2348341941833496
----------------------------------
train loss for this epoch:  0.224935
time for this epoch 42.14080047607422
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  85
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2617641
flow loss 0.078267835
occ loss 0.18349291
time for this batch 0.2258760929107666
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16613574
flow loss 0.052057356
occ loss 0.114076
time for this batch 0.28408026695251465
----------------------------------
train loss for this epoch:  0.226011
time for this epoch 41.776161432266235
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  86
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18891686
flow loss 0.056747716
occ loss 0.13216692
time for this batch 0.2205972671508789
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21548948
flow loss 0.06683658
occ loss 0.14865018
time for this batch 0.2801394462585449
----------------------------------
train loss for this epoch:  0.225803
time for this epoch 42.43686628341675
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  87
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1831291
flow loss 0.053809185
occ loss 0.1293176
time for this batch 0.21290087699890137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26473922
flow loss 0.06795693
occ loss 0.19677907
time for this batch 0.27499842643737793
----------------------------------
train loss for this epoch:  0.225634
time for this epoch 41.25823140144348
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  88
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19337264
flow loss 0.057306495
occ loss 0.13606367
time for this batch 0.23572397232055664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22776407
flow loss 0.06347205
occ loss 0.16428901
time for this batch 0.2765076160430908
----------------------------------
train loss for this epoch:  0.225452
time for this epoch 42.56925630569458
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  89
# batch:  125
i_batch:  0.0
the loss for this batch:  0.16001129
flow loss 0.048245482
occ loss 0.111763746
time for this batch 0.29604005813598633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24749418
flow loss 0.06199893
occ loss 0.18549249
time for this batch 0.30365562438964844
----------------------------------
train loss for this epoch:  0.224004
time for this epoch 46.2707200050354
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  90
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22294784
flow loss 0.05942814
occ loss 0.16351716
time for this batch 0.26371240615844727
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20011124
flow loss 0.055606283
occ loss 0.1445023
time for this batch 0.295459508895874
----------------------------------
train loss for this epoch:  0.225052
time for this epoch 45.46236515045166
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  91
# batch:  125
i_batch:  0.0
the loss for this batch:  0.34291768
flow loss 0.08866141
occ loss 0.25425255
time for this batch 0.2832016944885254
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22634764
flow loss 0.062191118
occ loss 0.16415374
time for this batch 0.23416376113891602
----------------------------------
train loss for this epoch:  0.22406
time for this epoch 41.62405347824097
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  92
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1952047
flow loss 0.052248262
occ loss 0.14295407
time for this batch 0.20032691955566406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21942464
flow loss 0.05836923
occ loss 0.16105296
time for this batch 0.28084540367126465
----------------------------------
train loss for this epoch:  0.2235
time for this epoch 41.527411222457886
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  93
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26886383
flow loss 0.078737065
occ loss 0.19012289
time for this batch 0.22262954711914062
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22621939
flow loss 0.066217184
occ loss 0.15999909
time for this batch 0.27254486083984375
----------------------------------
train loss for this epoch:  0.223516
time for this epoch 41.7328884601593
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  94
# batch:  125
i_batch:  0.0
the loss for this batch:  0.236787
flow loss 0.061913874
occ loss 0.17487004
time for this batch 0.22897028923034668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21778944
flow loss 0.05553454
occ loss 0.16225208
time for this batch 0.27350282669067383
----------------------------------
train loss for this epoch:  0.223679
time for this epoch 41.7143235206604
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  95
# batch:  125
i_batch:  0.0
the loss for this batch:  0.27300608
flow loss 0.07461398
occ loss 0.19838853
time for this batch 0.20975399017333984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20200396
flow loss 0.06363168
occ loss 0.13836966
time for this batch 0.27565836906433105
----------------------------------
train loss for this epoch:  0.224834
time for this epoch 42.17789649963379
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  96
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17571594
flow loss 0.052566424
occ loss 0.12314717
time for this batch 0.23063969612121582
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27532274
flow loss 0.07213444
occ loss 0.20318468
time for this batch 0.24730634689331055
----------------------------------
train loss for this epoch:  0.222347
time for this epoch 42.125614166259766
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  97
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23883267
flow loss 0.06643714
occ loss 0.17239253
time for this batch 0.23118233680725098
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22397767
flow loss 0.060420673
occ loss 0.16355409
time for this batch 0.25645899772644043
----------------------------------
train loss for this epoch:  0.22237
time for this epoch 42.26485276222229
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  98
# batch:  125
i_batch:  0.0
the loss for this batch:  0.15756157
flow loss 0.04441195
occ loss 0.11314759
time for this batch 0.23082470893859863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23843364
flow loss 0.07451372
occ loss 0.16391629
time for this batch 0.2690558433532715
----------------------------------
train loss for this epoch:  0.223393
time for this epoch 41.66188287734985
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  99
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21583909
flow loss 0.061775044
occ loss 0.154061
time for this batch 0.20503544807434082
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22177769
flow loss 0.06264668
occ loss 0.1591281
time for this batch 0.25884199142456055
----------------------------------
train loss for this epoch:  0.222248
time for this epoch 41.87956666946411
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  100
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25053006
flow loss 0.060978014
occ loss 0.18954886
time for this batch 0.2267923355102539
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19535416
flow loss 0.059441708
occ loss 0.13590981
time for this batch 0.2742905616760254
----------------------------------
train loss for this epoch:  0.222284
time for this epoch 41.88302159309387
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  101
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23970485
flow loss 0.06254308
occ loss 0.17715895
time for this batch 0.2393786907196045
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15218155
flow loss 0.045845766
occ loss 0.10633378
time for this batch 0.2697463035583496
----------------------------------
train loss for this epoch:  0.222588
time for this epoch 41.7710177898407
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  102
# batch:  125
i_batch:  0.0
the loss for this batch:  0.15445173
flow loss 0.04687271
occ loss 0.10757696
time for this batch 0.23672866821289062
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18465017
flow loss 0.05286251
occ loss 0.1317853
time for this batch 0.2666313648223877
----------------------------------
train loss for this epoch:  0.221131
time for this epoch 42.182504653930664
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  103
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2311126
flow loss 0.060325608
occ loss 0.1707843
time for this batch 0.21723723411560059
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25649813
flow loss 0.06859391
occ loss 0.18790081
time for this batch 0.27520203590393066
----------------------------------
train loss for this epoch:  0.222658
time for this epoch 41.94103455543518
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  104
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22370605
flow loss 0.062660456
occ loss 0.16104253
time for this batch 0.29522228240966797
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18173878
flow loss 0.05446863
occ loss 0.12726784
time for this batch 0.28040480613708496
----------------------------------
train loss for this epoch:  0.220782
time for this epoch 42.164156675338745
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  105
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18182305
flow loss 0.051067382
occ loss 0.13075303
time for this batch 0.20215988159179688
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25734118
flow loss 0.071897164
occ loss 0.18544061
time for this batch 0.26856136322021484
----------------------------------
train loss for this epoch:  0.221138
time for this epoch 41.61906695365906
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  106
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23199661
flow loss 0.0660961
occ loss 0.1658971
time for this batch 0.23978662490844727
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29297143
flow loss 0.072449654
occ loss 0.22051835
time for this batch 0.2513697147369385
----------------------------------
train loss for this epoch:  0.221434
time for this epoch 42.991281032562256
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  107
# batch:  125
i_batch:  0.0
the loss for this batch:  0.277559
flow loss 0.07470353
occ loss 0.20285168
time for this batch 0.21252703666687012
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21174045
flow loss 0.060059644
occ loss 0.15167812
time for this batch 0.23051762580871582
----------------------------------
train loss for this epoch:  0.219521
time for this epoch 37.33855581283569
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  108
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18799287
flow loss 0.056978904
occ loss 0.13101165
time for this batch 0.19335341453552246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19736488
flow loss 0.056951374
occ loss 0.1404111
time for this batch 0.17429780960083008
----------------------------------
train loss for this epoch:  0.220198
time for this epoch 32.094547271728516
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  109
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19845124
flow loss 0.058537643
occ loss 0.13991082
time for this batch 0.14661407470703125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12900876
flow loss 0.04000761
occ loss 0.088999905
time for this batch 0.28094935417175293
----------------------------------
train loss for this epoch:  0.221554
time for this epoch 40.52922701835632
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  110
# batch:  125
i_batch:  0.0
the loss for this batch:  0.29123098
flow loss 0.070914276
occ loss 0.2203127
time for this batch 0.24455022811889648
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24196027
flow loss 0.06488582
occ loss 0.1770712
time for this batch 0.2831461429595947
----------------------------------
train loss for this epoch:  0.221065
time for this epoch 43.47985100746155
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  111
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21193945
flow loss 0.05807212
occ loss 0.15386422
time for this batch 0.21563148498535156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17697644
flow loss 0.048886783
occ loss 0.12808712
time for this batch 0.2891218662261963
----------------------------------
train loss for this epoch:  0.219304
time for this epoch 42.6476035118103
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  112
# batch:  125
i_batch:  0.0
the loss for this batch:  0.29074085
flow loss 0.08216046
occ loss 0.2085763
time for this batch 0.25671958923339844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23613
flow loss 0.065006256
occ loss 0.17112048
time for this batch 0.3067033290863037
----------------------------------
train loss for this epoch:  0.219589
time for this epoch 46.06313991546631
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  113
# batch:  125
i_batch:  0.0
the loss for this batch:  0.278207
flow loss 0.07563792
occ loss 0.20256537
time for this batch 0.24155688285827637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22548349
flow loss 0.059422344
occ loss 0.16605827
time for this batch 0.2836167812347412
----------------------------------
train loss for this epoch:  0.219324
time for this epoch 43.34487795829773
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  114
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19846137
flow loss 0.05620601
occ loss 0.14225239
time for this batch 0.26648426055908203
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15912344
flow loss 0.04741393
occ loss 0.11170762
time for this batch 0.263167142868042
----------------------------------
train loss for this epoch:  0.218526
time for this epoch 42.2772421836853
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  115
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18201587
flow loss 0.0579648
occ loss 0.124048546
time for this batch 0.1976776123046875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25077718
flow loss 0.0643463
occ loss 0.1864277
time for this batch 0.2801206111907959
----------------------------------
train loss for this epoch:  0.219103
time for this epoch 42.24750351905823
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  116
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18177873
flow loss 0.054581765
occ loss 0.12719434
time for this batch 0.22018790245056152
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29937106
flow loss 0.07773812
occ loss 0.2216291
time for this batch 0.23135662078857422
----------------------------------
train loss for this epoch:  0.219595
time for this epoch 41.77748346328735
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  117
# batch:  125
i_batch:  0.0
the loss for this batch:  0.13742715
flow loss 0.04418909
occ loss 0.093236126
time for this batch 0.20531010627746582
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23492035
flow loss 0.06773365
occ loss 0.1671833
time for this batch 0.27465033531188965
----------------------------------
train loss for this epoch:  0.21866
time for this epoch 41.93105149269104
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  118
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19706239
flow loss 0.052432064
occ loss 0.14462781
time for this batch 0.34674501419067383
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17894603
flow loss 0.0544722
occ loss 0.1244713
time for this batch 0.282517671585083
----------------------------------
train loss for this epoch:  0.218751
time for this epoch 42.9322452545166
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  119
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19345264
flow loss 0.049019575
occ loss 0.14443098
time for this batch 0.2166001796722412
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18726231
flow loss 0.05712756
occ loss 0.1301323
time for this batch 0.2755732536315918
----------------------------------
train loss for this epoch:  0.219595
time for this epoch 42.20428681373596
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  120
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22263347
flow loss 0.058179308
occ loss 0.16445121
time for this batch 0.22710275650024414
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21619865
flow loss 0.06259569
occ loss 0.15360019
time for this batch 0.2814900875091553
----------------------------------
train loss for this epoch:  0.2183
time for this epoch 42.39837670326233
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  121
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21838075
flow loss 0.059964526
occ loss 0.15841326
time for this batch 0.21544599533081055
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18982664
flow loss 0.05843541
occ loss 0.13138835
time for this batch 0.2761216163635254
----------------------------------
train loss for this epoch:  0.218151
time for this epoch 41.94264364242554
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  122
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25425878
flow loss 0.06629129
occ loss 0.18796396
time for this batch 0.21388483047485352
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2440179
flow loss 0.064330176
occ loss 0.17968452
time for this batch 0.25794148445129395
----------------------------------
train loss for this epoch:  0.218036
time for this epoch 40.394192695617676
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  123
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24709047
flow loss 0.06817693
occ loss 0.17890991
time for this batch 0.2148280143737793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19023246
flow loss 0.056499675
occ loss 0.13373013
time for this batch 0.29401683807373047
----------------------------------
train loss for this epoch:  0.217708
time for this epoch 42.12530708312988
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  124
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19413736
flow loss 0.056859195
occ loss 0.13727532
time for this batch 0.2203385829925537
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18943799
flow loss 0.053025983
occ loss 0.13640949
time for this batch 0.2505354881286621
----------------------------------
train loss for this epoch:  0.218059
time for this epoch 42.186087131500244
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  125
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1786645
flow loss 0.049970746
occ loss 0.12869129
time for this batch 0.22788262367248535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27100927
flow loss 0.06846429
occ loss 0.20254134
time for this batch 0.2770829200744629
----------------------------------
train loss for this epoch:  0.218475
time for this epoch 44.13061261177063
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  126
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24293362
flow loss 0.0688357
occ loss 0.17409459
time for this batch 0.26584768295288086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20193446
flow loss 0.05195322
occ loss 0.14997841
time for this batch 0.2739095687866211
----------------------------------
train loss for this epoch:  0.217585
time for this epoch 41.20704174041748
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  127
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25962692
flow loss 0.07085204
occ loss 0.18877153
time for this batch 0.2411355972290039
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30560085
flow loss 0.079265736
occ loss 0.2263312
time for this batch 0.2752559185028076
----------------------------------
train loss for this epoch:  0.216929
time for this epoch 74.57909035682678
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  128
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21172437
flow loss 0.05990615
occ loss 0.15181547
time for this batch 0.3999898433685303
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22937457
flow loss 0.063131236
occ loss 0.16624044
time for this batch 0.22456932067871094
----------------------------------
train loss for this epoch:  0.217651
time for this epoch 41.94416117668152
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  129
# batch:  125
i_batch:  0.0
the loss for this batch:  0.28811038
flow loss 0.07567864
occ loss 0.21242768
time for this batch 0.23704290390014648
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21532606
flow loss 0.059957284
occ loss 0.15536605
time for this batch 0.22739338874816895
----------------------------------
train loss for this epoch:  0.217561
time for this epoch 42.19829058647156
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  130
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23525964
flow loss 0.06170663
occ loss 0.17355
time for this batch 0.20319724082946777
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24856043
flow loss 0.06999074
occ loss 0.17856641
time for this batch 0.2804100513458252
----------------------------------
train loss for this epoch:  0.218016
time for this epoch 42.40673041343689
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  131
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23918913
flow loss 0.06376412
occ loss 0.17542183
time for this batch 0.18825602531433105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22942914
flow loss 0.0629586
occ loss 0.16646716
time for this batch 0.27534055709838867
----------------------------------
train loss for this epoch:  0.216223
time for this epoch 41.292866706848145
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  132
# batch:  125
i_batch:  0.0
the loss for this batch:  0.24199821
flow loss 0.06203023
occ loss 0.17996466
time for this batch 0.22184085845947266
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2820892
flow loss 0.072885446
occ loss 0.2092001
time for this batch 0.19169306755065918
----------------------------------
train loss for this epoch:  0.216215
time for this epoch 40.32429528236389
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  133
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18958165
flow loss 0.054458637
occ loss 0.1351204
time for this batch 0.23932790756225586
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21371515
flow loss 0.05739487
occ loss 0.15631782
time for this batch 0.25901174545288086
----------------------------------
train loss for this epoch:  0.217879
time for this epoch 42.181485414505005
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  134
# batch:  125
i_batch:  0.0
the loss for this batch:  0.27856785
flow loss 0.06971441
occ loss 0.20885055
time for this batch 0.20453763008117676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21771833
flow loss 0.055815127
occ loss 0.16190045
time for this batch 0.22273612022399902
----------------------------------
train loss for this epoch:  0.215532
time for this epoch 41.18204712867737
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  135
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17137302
flow loss 0.046602666
occ loss 0.12476797
time for this batch 0.22974276542663574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18884005
flow loss 0.054760553
occ loss 0.13407692
time for this batch 0.23755669593811035
----------------------------------
train loss for this epoch:  0.217118
time for this epoch 42.76363134384155
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  136
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23570749
flow loss 0.06291287
occ loss 0.17279144
time for this batch 0.24987459182739258
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1890979
flow loss 0.057762522
occ loss 0.13133268
time for this batch 0.2755556106567383
----------------------------------
train loss for this epoch:  0.216306
time for this epoch 41.99428653717041
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  137
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1800248
flow loss 0.052529756
occ loss 0.12749246
time for this batch 0.22927284240722656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2831176
flow loss 0.06786663
occ loss 0.21524744
time for this batch 0.2647056579589844
----------------------------------
train loss for this epoch:  0.215089
time for this epoch 40.77215242385864
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  138
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21692424
flow loss 0.060094073
occ loss 0.15682708
time for this batch 0.2267289161682129
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19249007
flow loss 0.054648098
occ loss 0.13783923
time for this batch 0.27603936195373535
----------------------------------
train loss for this epoch:  0.215356
time for this epoch 41.9412362575531
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  139
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25906718
flow loss 0.07149022
occ loss 0.1875733
time for this batch 0.24434947967529297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21534374
flow loss 0.06377142
occ loss 0.1515694
time for this batch 0.26855993270874023
----------------------------------
train loss for this epoch:  0.216722
time for this epoch 41.282649993896484
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  140
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1356153
flow loss 0.040341865
occ loss 0.09527175
time for this batch 0.21072745323181152
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22091088
flow loss 0.058896802
occ loss 0.16201137
time for this batch 0.2796821594238281
----------------------------------
train loss for this epoch:  0.215606
time for this epoch 41.77755355834961
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  141
# batch:  125
i_batch:  0.0
the loss for this batch:  0.15888327
flow loss 0.047491554
occ loss 0.111389406
time for this batch 0.22901582717895508
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2138165
flow loss 0.05819419
occ loss 0.15561962
time for this batch 0.2518174648284912
----------------------------------
train loss for this epoch:  0.215878
time for this epoch 41.60721302032471
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  142
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26242977
flow loss 0.06665526
occ loss 0.19577126
time for this batch 0.20580816268920898
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18734731
flow loss 0.05320092
occ loss 0.13414395
time for this batch 0.2843153476715088
----------------------------------
train loss for this epoch:  0.215491
time for this epoch 41.74123525619507
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  143
# batch:  125
i_batch:  0.0
the loss for this batch:  0.15428059
flow loss 0.046939764
occ loss 0.107338905
time for this batch 0.2222905158996582
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27171588
flow loss 0.070128106
occ loss 0.20158462
time for this batch 0.283750057220459
----------------------------------
train loss for this epoch:  0.215411
time for this epoch 41.622607707977295
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  144
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17428474
flow loss 0.04469056
occ loss 0.12959199
time for this batch 0.23050904273986816
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21384189
flow loss 0.061130956
occ loss 0.15270762
time for this batch 0.2815978527069092
----------------------------------
train loss for this epoch:  0.215557
time for this epoch 41.570964336395264
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  145
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2227435
flow loss 0.062598005
occ loss 0.16014248
time for this batch 0.23709583282470703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20244241
flow loss 0.056272995
occ loss 0.14616685
time for this batch 0.2829272747039795
----------------------------------
train loss for this epoch:  0.214672
time for this epoch 41.465089082717896
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  146
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2647494
flow loss 0.06440261
occ loss 0.20034313
time for this batch 0.211226224899292
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23965442
flow loss 0.065159954
occ loss 0.1744914
time for this batch 0.29022955894470215
----------------------------------
train loss for this epoch:  0.214063
time for this epoch 43.30857443809509
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  147
# batch:  125
i_batch:  0.0
the loss for this batch:  0.30577043
flow loss 0.07431197
occ loss 0.23145455
time for this batch 0.18227815628051758
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21118817
flow loss 0.063556805
occ loss 0.14762825
time for this batch 0.265033483505249
----------------------------------
train loss for this epoch:  0.214866
time for this epoch 39.934157371520996
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  148
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19139647
flow loss 0.053137064
occ loss 0.13825677
time for this batch 0.3109135627746582
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2068136
flow loss 0.054955795
occ loss 0.15185468
time for this batch 0.23604798316955566
----------------------------------
train loss for this epoch:  0.214022
time for this epoch 41.79554605484009
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  149
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21289837
flow loss 0.056360964
occ loss 0.15653461
time for this batch 0.24272370338439941
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19420256
flow loss 0.055928875
occ loss 0.13827124
time for this batch 0.22690916061401367
----------------------------------
train loss for this epoch:  0.214405
time for this epoch 42.459909439086914
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  150
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26758417
flow loss 0.06630538
occ loss 0.20127577
time for this batch 0.21882295608520508
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1860262
flow loss 0.052577846
occ loss 0.13344596
time for this batch 0.26081275939941406
----------------------------------
train loss for this epoch:  0.207508
time for this epoch 42.84970164299011
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21838078
flow loss 0.055563807
occ loss 0.16281414
time for this batch 0.23534607887268066
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22131611
flow loss 0.05806899
occ loss 0.16324425
time for this batch 0.2843050956726074
----------------------------------
train loss for this epoch:  0.206561
time for this epoch 42.37132787704468
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2561783
flow loss 0.06876506
occ loss 0.18740949
time for this batch 0.23401117324829102
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21862979
flow loss 0.05661908
occ loss 0.162008
time for this batch 0.19288325309753418
----------------------------------
train loss for this epoch:  0.205413
time for this epoch 41.8275203704834
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  153
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2063981
flow loss 0.05999014
occ loss 0.14640503
time for this batch 0.2424464225769043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20794451
flow loss 0.055647932
occ loss 0.15229343
time for this batch 0.2602558135986328
----------------------------------
train loss for this epoch:  0.205591
time for this epoch 41.996498584747314
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  154
# batch:  125
i_batch:  0.0
the loss for this batch:  0.23400104
flow loss 0.06513864
occ loss 0.16885887
time for this batch 0.23223328590393066
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21544784
flow loss 0.05736001
occ loss 0.15808438
time for this batch 0.28777337074279785
----------------------------------
train loss for this epoch:  0.205944
time for this epoch 42.011351585388184
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  155
# batch:  125
i_batch:  0.0
the loss for this batch:  0.26626837
flow loss 0.06582797
occ loss 0.20043662
time for this batch 0.24588298797607422
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24651918
flow loss 0.06264786
occ loss 0.18386777
time for this batch 0.26252031326293945
----------------------------------
train loss for this epoch:  0.20563
time for this epoch 41.38074707984924
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  156
# batch:  125
i_batch:  0.0
the loss for this batch:  0.25792325
flow loss 0.06390717
occ loss 0.19401288
time for this batch 0.21559596061706543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2176674
flow loss 0.06022596
occ loss 0.15743865
time for this batch 0.25992822647094727
----------------------------------
train loss for this epoch:  0.205594
time for this epoch 41.72551679611206
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  157
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2294323
flow loss 0.062635526
occ loss 0.16679345
time for this batch 0.24199151992797852
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15671913
flow loss 0.044516373
occ loss 0.112200655
time for this batch 0.27823305130004883
----------------------------------
train loss for this epoch:  0.205645
time for this epoch 41.39026594161987
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  158
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1835721
flow loss 0.050661433
occ loss 0.13290808
time for this batch 0.28292417526245117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26503032
flow loss 0.071869746
occ loss 0.19315697
time for this batch 0.30738186836242676
----------------------------------
train loss for this epoch:  0.205669
time for this epoch 43.35257077217102
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  159
# batch:  125
i_batch:  0.0
the loss for this batch:  0.12965152
flow loss 0.041757625
occ loss 0.08789203
time for this batch 0.24086260795593262
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19814679
flow loss 0.056158993
occ loss 0.14198492
time for this batch 0.2850682735443115
----------------------------------
train loss for this epoch:  0.205507
time for this epoch 43.118030309677124
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  160
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18234546
flow loss 0.050439455
occ loss 0.13190348
time for this batch 0.22597646713256836
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16208196
flow loss 0.044372242
occ loss 0.11770737
time for this batch 0.30304718017578125
----------------------------------
train loss for this epoch:  0.205206
time for this epoch 43.394784688949585
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  161
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17577036
flow loss 0.04854952
occ loss 0.12721846
time for this batch 0.24975943565368652
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22403318
flow loss 0.06213473
occ loss 0.1618952
time for this batch 0.2562134265899658
----------------------------------
train loss for this epoch:  0.205484
time for this epoch 43.60386610031128
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  162
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21644096
flow loss 0.057327032
occ loss 0.15911084
time for this batch 0.25058412551879883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14962867
flow loss 0.0462454
occ loss 0.10338128
time for this batch 0.25864458084106445
----------------------------------
train loss for this epoch:  0.205237
time for this epoch 44.75456666946411
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  163
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1695727
flow loss 0.04843332
occ loss 0.12113708
time for this batch 0.21361446380615234
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20049307
flow loss 0.055868294
occ loss 0.14462176
time for this batch 0.40930628776550293
----------------------------------
train loss for this epoch:  0.205138
time for this epoch 42.40977454185486
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  164
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17399058
flow loss 0.055794474
occ loss 0.118193515
time for this batch 0.23539519309997559
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26454535
flow loss 0.070985876
occ loss 0.19355574
time for this batch 0.24124908447265625
----------------------------------
train loss for this epoch:  0.204947
time for this epoch 41.69672465324402
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  165
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21871454
flow loss 0.05852745
occ loss 0.16018379
time for this batch 0.254474401473999
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16516091
flow loss 0.046492703
occ loss 0.11866586
time for this batch 0.28757166862487793
----------------------------------
train loss for this epoch:  0.205299
time for this epoch 43.10002088546753
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  166
# batch:  125
i_batch:  0.0
the loss for this batch:  0.20719512
flow loss 0.057472095
occ loss 0.14971986
time for this batch 0.25737738609313965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22925708
flow loss 0.058483463
occ loss 0.17077073
time for this batch 0.25655293464660645
----------------------------------
train loss for this epoch:  0.204842
time for this epoch 42.00644850730896
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  167
# batch:  125
i_batch:  0.0
the loss for this batch:  0.16968362
flow loss 0.04656805
occ loss 0.12311305
time for this batch 0.2137317657470703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.230568
flow loss 0.058031835
occ loss 0.1725329
time for this batch 0.22789525985717773
----------------------------------
train loss for this epoch:  0.204953
time for this epoch 41.83325934410095
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  168
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17287338
flow loss 0.0489288
occ loss 0.12394205
time for this batch 0.2243490219116211
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21835619
flow loss 0.057922743
occ loss 0.16043034
time for this batch 0.2635667324066162
----------------------------------
train loss for this epoch:  0.204714
time for this epoch 42.79268288612366
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  169
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1489692
flow loss 0.04259062
occ loss 0.10637648
time for this batch 0.2522766590118408
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25594637
flow loss 0.06328515
occ loss 0.19265784
time for this batch 0.21370935440063477
----------------------------------
train loss for this epoch:  0.204752
time for this epoch 34.54697632789612
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  170
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19134444
flow loss 0.05356236
occ loss 0.13777943
time for this batch 0.2964956760406494
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12377656
flow loss 0.038140032
occ loss 0.08563481
time for this batch 0.3330874443054199
----------------------------------
train loss for this epoch:  0.204713
time for this epoch 44.99956250190735
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  171
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17428608
flow loss 0.04655173
occ loss 0.12773235
time for this batch 0.23907923698425293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25466868
flow loss 0.06506466
occ loss 0.18960054
time for this batch 0.22305774688720703
----------------------------------
train loss for this epoch:  0.204913
time for this epoch 39.15002751350403
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  172
# batch:  125
i_batch:  0.0
the loss for this batch:  0.2418112
flow loss 0.06960455
occ loss 0.17220354
time for this batch 0.24449515342712402
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20872624
flow loss 0.059464402
occ loss 0.14925897
time for this batch 0.28780531883239746
----------------------------------
train loss for this epoch:  0.204235
time for this epoch 43.31948447227478
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  173
# batch:  125
i_batch:  0.0
the loss for this batch:  0.17534937
flow loss 0.051361926
occ loss 0.12398487
time for this batch 0.23879241943359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1911637
flow loss 0.05308005
occ loss 0.13808072
time for this batch 0.2928144931793213
----------------------------------
train loss for this epoch:  0.204676
time for this epoch 41.69868731498718
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  174
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21232168
flow loss 0.05608559
occ loss 0.15623324
time for this batch 0.23205041885375977
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16554348
flow loss 0.049815714
occ loss 0.11572546
time for this batch 0.23040246963500977
----------------------------------
train loss for this epoch:  0.204722
time for this epoch 42.11300277709961
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  175
# batch:  125
i_batch:  0.0
the loss for this batch:  0.18218865
flow loss 0.055352807
occ loss 0.12683322
time for this batch 0.19911789894104004
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19288406
flow loss 0.05811435
occ loss 0.134767
time for this batch 0.260974645614624
----------------------------------
train loss for this epoch:  0.204493
time for this epoch 41.75030708312988
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  176
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19099781
flow loss 0.051372446
occ loss 0.13962275
time for this batch 0.26321887969970703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16621423
flow loss 0.04818902
occ loss 0.11802298
time for this batch 0.26686596870422363
----------------------------------
train loss for this epoch:  0.204523
time for this epoch 41.77340841293335
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  177
# batch:  125
i_batch:  0.0
the loss for this batch:  0.19908738
flow loss 0.056425314
occ loss 0.14265929
time for this batch 0.22936248779296875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18749772
flow loss 0.054349273
occ loss 0.13314578
time for this batch 0.26246166229248047
----------------------------------
train loss for this epoch:  0.204394
time for this epoch 42.009405851364136
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  178
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21519817
flow loss 0.05989522
occ loss 0.15529965
time for this batch 0.21381640434265137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22106177
flow loss 0.060625277
occ loss 0.16043341
time for this batch 0.29407739639282227
----------------------------------
train loss for this epoch:  0.204114
time for this epoch 40.89222431182861
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  179
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21283524
flow loss 0.05645304
occ loss 0.15637906
time for this batch 0.25467824935913086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23521602
flow loss 0.06693491
occ loss 0.16827779
time for this batch 0.25475311279296875
----------------------------------
train loss for this epoch:  0.204356
time for this epoch 42.15910220146179
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  180
# batch:  125
i_batch:  0.0
the loss for this batch:  0.22205174
flow loss 0.058937307
occ loss 0.16311118
time for this batch 0.26519274711608887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19125162
flow loss 0.05429957
occ loss 0.13694932
time for this batch 0.25635862350463867
----------------------------------
train loss for this epoch:  0.204259
time for this epoch 42.96978282928467
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  181
# batch:  125
i_batch:  0.0
the loss for this batch:  0.1611499
flow loss 0.05323099
occ loss 0.1079165
time for this batch 0.20953798294067383
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24725711
flow loss 0.06226711
occ loss 0.1849865
time for this batch 0.23098969459533691
----------------------------------
train loss for this epoch:  0.204084
time for this epoch 41.2967963218689
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  182
# batch:  125
i_batch:  0.0
the loss for this batch:  0.21133523
flow loss 0.056435525
occ loss 0.15489668
time for this batch 0.24483084678649902
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20561323
flow loss 0.053590134
occ loss 0.1520201
time for this batch 0.27539753913879395
----------------------------------
train loss for this epoch:  0.204336
time for this epoch 41.0474693775177
Early stop at the 183-th epoch

7: apply the model to vali and test¶

In [14]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one data split and print its error metrics.

    Parameters
    ----------
    model : the trained NMFD_GNN model to evaluate.
    vt : dict-like split holding "flow", "flow_mask", "occupancy" and
        "occupancy_mask" tensors; the two masks are moved to the global
        ``device`` before evaluation.
    f_o_mean_std : flow/occupancy normalization statistics, forwarded to
        ``vali_test`` (presumably for de-normalizing predictions — confirm).

    Returns
    -------
    tuple
        (flow MAE, flow RMSE, occupancy MAE, occupancy RMSE) exactly as
        produced by ``vali_test``.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)
    # NOTE(review): flow/occ themselves are not moved to `device` here —
    # presumably vali_test handles device placement for them; confirm.
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    # Print each metric under the same labels the original cell used.
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print (label, value)
    return metrics

Validate¶

In [15]:
# Evaluate the early-stopped model on the validation split; prints and
# stores flow/occupancy MAE and RMSE for later comparison with the test set.
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
    apply_to_vali_test(trained_model, vali, f_o_mean_std)
flow_mae 35.706013219496114
flow_rmse 55.067228280538785
occ_mae 0.03881964723069924
occ_rmse 0.07973054776639152

Test¶

In [16]:
# Evaluate the same trained model on the held-out test split; prints and
# stores the four final error metrics reported for this experiment.
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
    apply_to_vali_test(trained_model, test, f_o_mean_std)
flow_mae 33.70526106641875
flow_rmse 52.30127946606076
occ_mae 0.03179610162832142
occ_rmse 0.06858186673505016
In [ ]:
 
In [ ]:
 
In [ ]: